Running Noddy in batch mode

Idea: use Noddy to generate block models with a parameterised tectonic kinematic history through the batch implementation of Noddy. Write simple functions to parse history files and make changes, and to parse and visualise the output files. Also check how easy it is to generate VTK files from the block model output for visualisation with Paraview.

Calling Noddy from Python


In [1]:
import sys, os
import subprocess
noddyprogram = os.path.realpath(r'../../noddy')
example_directory = os.path.realpath(r'../../docs/examples')
# history_file = 'noddy_first_test1.his'
history_file = 'noddy_one_fault.his'
history = os.path.join(example_directory, history_file)
output_file = 'noddy_out'
# add the pynoddy directory to the Python path
sys.path.append(os.path.realpath(r"../../python"))
print os.getcwd()
print example_directory
os.listdir(example_directory)
print history


/Users/flow/git/floddy/python/notebooks
/Users/flow/git/floddy/docs/examples
/Users/flow/git/floddy/docs/examples/noddy_one_fault.his

In [2]:
# call Noddy
print subprocess.Popen([noddyprogram, history, output_file], 
                       shell=False, stderr=subprocess.PIPE, 
                       stdout=subprocess.PIPE).stdout.read()
# next step: include a check to wait until the process is finished and
# open the noddyBatchProgress.txt file to check whether the process is complete
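
A minimal sketch of that check: wait for the Noddy process via Popen.communicate() and then look for the noddyBatchProgress.txt file mentioned above. How exactly that file signals completion still needs to be verified against the Noddy batch output.

# sketch: run Noddy, wait for it to finish, then look for the progress file
p = subprocess.Popen([noddyprogram, history, output_file],
                     shell=False, stderr=subprocess.PIPE,
                     stdout=subprocess.PIPE)
stdout, stderr = p.communicate()  # blocks until the process has terminated
print stdout
if os.path.exists("noddyBatchProgress.txt"):
    # content/format of this file still to be checked
    print open("noddyBatchProgress.txt").read()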




In [3]:
import pynoddy

In [4]:
print os.getcwd()


/Users/flow/git/floddy/python/notebooks

Convert results to numpy array

First step: load results and convert to numpy array


In [5]:
lines = open(output_file + ".g12").readlines()
# quick look at the file structure: split each line into tab-separated entries
for line in lines:
    if line.strip() == '': continue
    l = [l1 for l1 in line.split("\t") if l1 != '']

In [6]:
import numpy as np

In [7]:
f = open(output_file + ".g12")
block = np.loadtxt(f, dtype="int")

In [8]:
# Unfortunately, the data is not quite in the right shape...
np.shape(block)


Out[8]:
(1250, 35)

In [9]:
# reshape to proper 3-D shape
nx = 35 # 140
ny = 50 # 200
nz = 25 # 100
block = block.reshape((nz,ny,nx))
# note to do:
# - swap x and y axes
# - read dimensions from .g00 output file
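
A minimal sketch of both to-do items, assuming the .g00 header stores the grid dimensions in 'key = value' lines (the key names used below are assumptions that still need to be checked against an actual .g00 file):

# sketch: swap the x and y axes of the block (z stays the first axis)
block_xy = np.transpose(block, (0, 2, 1))   # shape (nz, nx, ny)

# sketch: parse grid dimensions from the .g00 header;
# the key names are assumptions to verify against a real .g00 file
def read_dimensions(g00_filename):
    nx, ny, nz = None, None, None
    for line in open(g00_filename).readlines():
        if 'NUMBER OF LAYERS' in line:
            nz = int(line.split('=')[1])
        elif 'LAYER 1 DIMENSIONS' in line:
            nx, ny = [int(v) for v in line.split('=')[1].split()]
    return nx, ny, nz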

In [10]:
import matplotlib.pyplot as plt

In [11]:
plt.imshow(block[10,:,:], interpolation ='nearest', aspect='equal')


Out[11]:
<matplotlib.image.AxesImage at 0x109b31690>

In [12]:
plt.imshow(block[:,10,:], interpolation ='nearest', aspect=1)


Out[12]:
<matplotlib.image.AxesImage at 0x109bec4d0>

In [13]:
plt.imshow(block[:,:,20], interpolation ='nearest', aspect=1)


Out[13]:
<matplotlib.image.AxesImage at 0x109cbc490>

In [14]:
from evtk.hl import gridToVTK
# define x, y, z ranges and export to VTK
lx, ly, lz = 7000., 10000.0, 5000.0
dx, dy, dz = lx/nx, ly/ny, lz/nz
# Coordinates
x = np.arange(0, lx + 0.1*dx, dx, dtype='float64')
y = np.arange(0, ly + 0.1*dy, dy, dtype='float64')
z = np.arange(0, lz + 0.1*dz, dz, dtype='float64')

x1 = np.linspace(0,1000,nx)
y1 = np.linspace(0,500,ny)
z1 = np.linspace(0,300,nz)

gridToVTK("./geology", z, y, x, cellData = {"geology" : block}) 
gridToVTK("./geology_points", z1, y1, x1, pointData = {"geology" : block})


Out[14]:
'/Users/flow/git/floddy/python/notebooks/geology_points.vtr'

Wrap model computation and export to VTK

Create a function that wraps the model computation, the export of the block model, the conversion into a numpy array, and the export to VTK.


In [15]:
def compute_and_export_model(history_filename, 
                             vtk_filename="./noddy_block",
                             **kwds):
    """Compute model from history file and export block model to VTK
    
    **Arguments**:
        - *history_filename* = string : name of noddy history file
        - *vtk_filename* = string : name of VTK file to store model
    **Optional Keywords**:
        - *save_vtk* = True/ False : save to vtk (default: True)
    """
    # translate keywords
    save_vtk = kwds.get("save_vtk", True)
    
    # call Noddy
    output_filename = 'tmp'
    subprocess.Popen([noddyprogram, history_filename, output_filename], 
                           shell=False, stderr=subprocess.PIPE, 
                           stdout=subprocess.PIPE).stdout.read()
    # now read output file and create numpy grid
    f = open(output_filename + ".g12")
    block = np.loadtxt(f, dtype="int")
    # reshape to proper 3-D shape
    #
    # NOTE: to do: extract nx, ny, nz from .g00 file!
    #
    nx = 140
    ny = 200
    nz = 100
    # print block.shape
    block = block.reshape((nz,ny,nx))
    # define x, y, z ranges and export to VTK
    lx, ly, lz = 7000., 10000.0, 5000.0
    dx, dy, dz = lx/nx, ly/ny, lz/nz
    # Coordinates
    x = np.arange(0, lx + 0.1*dx, dx, dtype='float64')
    y = np.arange(0, ly + 0.1*dy, dy, dtype='float64')
    z = np.arange(0, lz + 0.1*dz, dz, dtype='float64')
    
    x1 = np.linspace(0,1000,nx)
    y1 = np.linspace(0,500,ny)
    z1 = np.linspace(0,300,nz)
    
    if save_vtk:
        gridToVTK(vtk_filename, z, y, x, cellData = {"geology" : block}) 
    else:
        return block

Change some things in the input file

Now change some aspects of the input (history) file, then run Noddy and the VTK export again.


In [16]:
history_file = 'noddy_two_faults.his'
# history_file = 'noddy_one_fault.his'
history = os.path.join(example_directory, history_file)
f = open(history, 'r')
lines = f.readlines()
# create copy for changes
lines_new = lines[:]

In [17]:
# to change geology cube size:
cube_size = 50.00
for i,line in enumerate(lines):
    if 'Geophysics Cube Size' in line: 
        print line
        l = line.split('=')
        l_new = '%7.2f\r\n' % cube_size
        line_new = l[0] + "=" + l_new
        lines_new[i] = line_new


	Geophysics Cube Size	= 200.00


In [18]:
# Change dip and dip direction of the faults
dip_dir = 90
dip = 40
for i,line in enumerate(lines):
    if 'Dip' in line \
    and 'Mag' not in line \
    and 'Borehole' not in line\
    and 'Name' not in line:
        print line
        l = line.split('=')
        if "Direction" in line:
            l_new = '%7.2f\r\n' % dip_dir
            line_new = l[0] + "=" + l_new
            lines_new[i] = line_new
        else:
            l_new = '%7.2f\r\n' % dip
            line_new = l[0] + "=" + l_new
            lines_new[i] = line_new


	Dip Direction	= 104.00

	Dip	=  73.00

	Dip Direction	= 271.00

	Dip	= 127.00


In [19]:
# save to file, recompute and export
# new_file = "noddy_one_fault_new.his"
new_file = "noddy_two_faults_new.his"
f = open(new_file, "w")
for line in lines_new:
    f.write(line)
f.close()
    
compute_and_export_model(new_file, "geology_two_faults")

First uncertainty test

Now implement a little probabilistic study: a block model with uncertain fault properties! Analyse the results as 3-D probability fields and, of course, entropy!

Noddy uncertainty test: create n models with uncertain dip, dip direction and slip


In [20]:
# set Monte Carlo properties and pdfs for parameters
n = 100 # number of draws
# dip_mean = 65.
dip_stdev = 10.
# dip_direction_mean = 90.
dip_dir_stdev = 20.
# slip_mean = 1000.
slip_stdev = 400.
cube_size = 50.

In [21]:
# read template file
f_template = open(history, 'r')
lines_template = f_template.readlines()
f_template.close()
# now perform sampling and export models to numpy arrays
nx = 140
ny = 200
nz = 100
all_results = np.ndarray((n, nz, ny, nx))
for j in range(n):
    if (j%(n/10) == 0):
        print "Calculate model %3d of %d" % (j, n)
    # create copy for changes
    lines_new = lines_template[:]
    # draw samples for dip, dip direction and slip, using the value from
    # the history file as the mean of each distribution
    
    for i,line in enumerate(lines_template):
        if 'Dip' in line \
        and 'Mag' not in line \
        and 'Borehole' not in line\
        and 'Name' not in line:
            # print line
            l = line.split('=')
            if "Direction" in line:
                dip_dir_ori = float(l[1])
                dip_dir = np.random.normal(dip_dir_ori, dip_dir_stdev)
                l_new = '%7.2f\r\n' % dip_dir
                line_new = l[0] + "=" + l_new
                lines_new[i] = line_new
            else:
                dip_ori = float(l[1])
                dip = np.random.normal(dip_ori, dip_stdev)
                l_new = '%7.2f\r\n' % dip
                line_new = l[0] + "=" + l_new
                lines_new[i] = line_new
        if 'Slip' in line:
            l = line.split('=')
            slip_ori = float(l[1])
            slip = np.random.normal(slip_ori, slip_stdev)
            l_new = '%7.2f\r\n' % slip
            line_new = l[0] + "=" + l_new
            lines_new[i] = line_new
 
        if 'Geophysics Cube Size' in line: 
            # print line
            l = line.split('=')
            l_new = '%7.2f\r\n' % cube_size
            line_new = l[0] + "=" + l_new
            lines_new[i] = line_new
            
    # write to temporary history file
    history_tmp = "history_tmp.his"
    f = open(history_tmp, "w")
    for line in lines_new:
        f.write(line)
    f.close()    
    
    # now compute and export block model to numpy array
    block = compute_and_export_model(history_tmp, save_vtk=False)
    all_results[j,:,:,:] = block


Calculate model   0 of 100
Calculate model  10 of 100
Calculate model  20 of 100
Calculate model  30 of 100
Calculate model  40 of 100
Calculate model  50 of 100
Calculate model  60 of 100
Calculate model  70 of 100
Calculate model  80 of 100
Calculate model  90 of 100

In [22]:
# define x, y, z ranges and export to VTK
lx, ly, lz = 7000., 10000.0, 5000.0
dx, dy, dz = lx/nx, ly/ny, lz/nz
# Coordinates
x = np.arange(0, lx + 0.1*dx, dx, dtype='float64')
y = np.arange(0, ly + 0.1*dy, dy, dtype='float64')
z = np.arange(0, lz + 0.1*dz, dz, dtype='float64')

x1 = np.linspace(0,1000,nx)
y1 = np.linspace(0,500,ny)
z1 = np.linspace(0,300,nz)

# Determine probability fields
n_layers = 7
prob_layers = np.ndarray((n_layers, nz, ny, nx))
for l in range(n_layers):
    layer = all_results == l+1
    layer_sum = np.sum(layer, axis=0)
    layer_prob = layer_sum / float(n)
    prob_layers[l] = layer_prob
    
    # export to VTK
    gridToVTK("prob_layer_%d" % (l+1), z, y, x, cellData = {"layer %d" % (l+1) : layer_prob})

Grand Finale: calculate and plot information entropy

The next step is to perform a detailed information-theoretic analysis, comparing prior and posterior. The first step is simply to calculate the information entropy from the probability fields with Shannon's famous equation

$H = - \sum_{i=1}^n p(x_i) \log p(x_i)$
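
As a quick sanity check of the formula: a voxel that could equally belong to either of two layers carries one bit of entropy, while a less ambiguous distribution gives a lower value.

p = np.array([0.5, 0.5])
print -np.sum(p * np.log2(p))   # 1.0: maximum uncertainty between two layers
p = np.array([0.9, 0.1])
print -np.sum(p * np.log2(p))   # ~0.47: less uncertain, lower entropy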


In [23]:


# From mutual information paper:
def info_entropy(input_list):
    """calculate and return information entropy of an array/ list to base 2"""
    if not np.allclose(np.sum(input_list), 1., rtol=0.1):
        print("Problem with input table: sum not equal to 1!")
        return None
    h = np.sum(- np.take(input_list, np.nonzero(input_list)) * \
               np.log2(np.take(input_list, np.nonzero(input_list))))
    # old implementation with loop:
    #    h = 0
    #    for i in input_list:
    #        if i > 0:
    #            h -= i * numpy.log2(i)
    return h

# function to create entropy array from all layer probabilities
def info_entropy_model(prob_layers):
    """Calculate information entropy given the layer probabilities"""
    H = np.ndarray((nz, ny, nx))  # note: grid dimensions nx, ny, nz are taken from the globals defined above
    n_total = nx * ny * nz
    count = 0
    for k in range(nz):
        count += 1
        if (count % (nz/10)) == 0:
            print("Calculate layer %3d of %d" % (count, nz))

        for j in range(ny):
            for i in range(nx):
                H[k,j,i] = info_entropy(prob_layers[:,k,j,i])
    return H
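
The triple loop above is straightforward but slow for a 140 x 200 x 100 grid. A vectorised sketch of the same calculation, treating 0 * log2(0) as 0 and summing over the layer axis (the result should agree with info_entropy_model):

def info_entropy_model_vectorized(prob_layers):
    """Vectorised entropy calculation over the layer axis (axis 0)"""
    with np.errstate(divide='ignore', invalid='ignore'):
        # p * log2(p) is taken as 0 where p == 0
        plogp = np.where(prob_layers > 0,
                         prob_layers * np.log2(prob_layers), 0.)
    return -np.sum(plogp, axis=0)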

In [24]:
H = info_entropy_model(prob_layers)


Calculate layer  10 of 100
Calculate layer  20 of 100
Calculate layer  30 of 100
Calculate layer  40 of 100
Calculate layer  50 of 100
Calculate layer  60 of 100
Calculate layer  70 of 100
Calculate layer  80 of 100
Calculate layer  90 of 100
Calculate layer 100 of 100

In [25]:
# Write entropy to VTK
# define x, y, z ranges and export to VTK
lx, ly, lz = 7000., 10000.0, 5000.0
dx, dy, dz = lx/nx, ly/ny, lz/nz
# Coordinates
x = np.arange(0, lx + 0.1*dx, dx, dtype='float64')
y = np.arange(0, ly + 0.1*dy, dy, dtype='float64')
z = np.arange(0, lz + 0.1*dz, dz, dtype='float64')

x1 = np.linspace(0,1000,nx)
y1 = np.linspace(0,500,ny)
z1 = np.linspace(0,300,nz)

gridToVTK("./two_faults_info_entropy", z, y, x, cellData = {"info entropy" : H})


Out[25]:
'/Users/flow/git/floddy/python/notebooks/two_faults_info_entropy.vtr'

In [100]:
# quick test: exponential increase with depth
depth = np.arange(0,10000,100)
half = 3000.
factor = 20.
plt.plot(depth, factor*np.exp(depth/half))
print np.exp(400/250.)


4.9530324244

In [87]:
# quick check: np.random.normal accepts arrays of means and standard deviations
np.random.normal([10,20],[2,4])


Out[87]:
array([  8.76074229,  23.21646091])

In [84]:
# quick check: a single draw with the standard-library random module
m = 400
stdev = 10
import random
random.gauss(m,stdev)


Out[84]:
407.318787244581
